%load_ext autoreload
%autoreload 2
%run imports.py
-> Markus: does he have a tidal model or observations? To put this into context with Lenn et al. (2011).
Summer, LaptevData.xlsx:
Im Anhang findest Du Daten von einer Reise in 2011, auf der Nährstoffe gemessen wurden. Da kannst Du Dir ein Profil rausgreifen als "representativ" für den Laptev shelf. Station 15 ist ungefähr die Station an der in 2014 die MSS Daten gemessen wurden. Blöderweise wurde in 2014 kein Nitrat gemessen.
Guck Dir die Daten mal an und sag Bescheid, ob die für Dich brauchbar wären, dann regel ich später die Rahmenbedingungen.
Winter, LaptevData2.xlsx:
Hier nochmal Winterdaten (TI08) vom April 2008, allerdings sind die nicht ganz so weit nördlich gewesen. Es ist ziemlich frustrierend, bei der darauffolgenden Sommerexpedition (IP08) wurde kein Nitrat gemessen. Verstehe es wer will... Kannst ja mal durchforsten, vielleicht findet sich da ja ein brauchbares Profil.
def load_laptev_no3():
    """Load and combine the Laptev Sea nitrate bottle data.

    Reads the summer 2011 cruise (LaptevData.xlsx) and the winter TI08
    cruise from April 2008 (LaptevData2.xlsx), renames columns to short
    names, drops rows without nitrate, and bins depth into 5 m cells
    (labels are the bin centers: 2.5, 7.5, ... m).

    Returns
    -------
    pandas.DataFrame
        Columns: date, lat, lon, nitrate, depth, temp, sal, station,
        cruise, month.
    """
    # NOTE: the 'ー' glyphs reproduce the (mis-encoded) degree signs in the
    # Excel headers — they must match the files byte-for-byte, do not "fix".
    renamedict = {
        'Lon [ーE]': 'lon',
        'Lat [ーN]': 'lat',
        'Nitrate [umol/l]': 'nitrate',
        'bottle Salinity [psu]': 'sal',
        'Cruise': 'cruise',
        'mon/day/yr': 'date',
        'Temperature [ーC]': 'temp',
        'Depth [m]': 'depth',
        'Station': 'station'
    }
    sel = ['date', 'lat', 'lon', 'nitrate', 'depth', 'temp', 'sal', 'station', 'cruise']
    kwargs = dict(parse_dates=['mon/day/yr'], dtype={'Station': str})
    # BUG FIX (naming only): LaptevData.xlsx holds the *summer* 2011 profiles
    # and LaptevData2.xlsx the *winter* (TI08) profiles; the original names
    # dfw/dfs were swapped.  Concatenation order is preserved.
    df_summer = pd.read_excel('../data/laptev/LaptevData.xlsx', **kwargs)
    df_winter = pd.read_excel('../data/laptev/LaptevData2.xlsx', **kwargs)
    df = pd.concat([df_summer, df_winter], sort=False)
    df = df.rename(columns=renamedict).dropna(how='all', subset=['nitrate'])[sel]
    df = df.assign(month=lambda d: d.date.dt.month)
    # Bin depth to 5 m cells; to_numeric turns the categorical labels into floats.
    df.depth = pd.to_numeric(pd.cut(df.depth, bins=np.arange(0, 80, 5),
                                    labels=np.arange(2.5, 75, 5)))
    return df
df = load_laptev_no3()
Station 15 closest to MSS
lenn2011intermittent: NLS station (drift) and CLS station (mooring)
# Positions from Lenn et al. (2011): the drifting turbulence station and
# the mooring, given as degrees + decimal minutes.
_lenn_turb = gv.Points((144 + 56.13 / 60, 79 + 14.81 / 60), ['lon', 'lat'],
                       label='Lenn et al. 2011, turbulence obs')
_lenn_mooring = gv.Points((125 + 55.3 / 60, 76 + 44. / 60), ['lon', 'lat'],
                          label='Lenn et al. 2011, mooring')
lenn2011 = _lenn_turb * _lenn_mooring
None of the profiles in this dataset have a clear nitracline, as opposed to the ones in the Codispoti et al. database.
# Quick map check: all profile positions, with station 15 (roughly the 2014
# MSS position, see notes above) highlighted as a large square.
gf.land * gv.Points(df.loc[df.station=='15'], ['lon', 'lat']).opts(marker='square', size=20) * gv.Points(df, ['lon', 'lat'])
# Nitrate profiles in the box around the MSS site (76-77N, 124-128E)
df.loc[df.lat.between(76, 77) & df.lon.between(124, 128)].hvplot('nitrate', 'depth', by='station')
# Collapse each profile to a single (first) record per station/cruise/month
# so the map below gets one marker per station.  NOTE: this overwrites df.
df = df.groupby(['station', 'cruise', 'month'], as_index=False).first()
hv.extension('bokeh')
# Map projection centered on the Laptev shelf
proj = ccrs.LambertConformal(central_longitude=110, central_latitude=75)
def visaxes(plot, element):
    """Bokeh plot hook forcing the axes to be visible (projected GeoViews
    plots otherwise hide them — NOTE(review): confirm still needed)."""
    plot.state.axis.visible = True
# Styling for the station map (applied via the overlay's .opts below).
options = [
    opts.Points(
        # one color per labelled point group, jittered to reduce overlap
        color=hv.Cycle(cmap_to_list(colorcet.cm.glasbey_dark)),
        jitter=True, tools=['hover'],
        height=500, aspect=1,
        size=10, projection=proj,
        hooks=[visaxes],
        # projected (LambertConformal) coordinates, hence meters
        xlabel='x (m)', ylabel='y (m)'
    ),
    # highlight the MSS station with a big square marker
    opts.Points('MSS station', marker='square', size=20, padding=.1),
    opts.Shape(
        'Bathymetry',
        show_legend=True, line_width=2,
        #line_color=hv.Cycle(cmap_to_list(colorcet.cm.glasbey_dark)),
        line_color='black',
        # distinguish isobaths by dash pattern rather than color
        line_dash=hv.Cycle(['solid', 'dashed', 'dotted']),
    ),
    opts.Shape(
        'Land',
        fill_color='wheat', line_color='black'
    ),
    opts.Overlay(show_legend=True, legend_position='right'),
]
# Profile positions split by season (month <= 6 counted as winter here)
prof_winter = gv.Points(df.loc[df.month<=6], ['lon', 'lat'], ['date', 'month', 'station'],
                        crs=ccrs.PlateCarree(), label='Winter')
prof_summer = gv.Points(df.loc[df.month>6], ['lon', 'lat'], ['date', 'month', 'station'],
                        crs=ccrs.PlateCarree(), label='Summer')
# station-number labels (defined but not added to the overlay below)
lb = gv.Labels(df, ['lon', 'lat'], 'station')
# single marker at station 15 = the MSS site
mss = gv.Points(
    df.loc[df.station=='15'].iloc[0:1], ['lon', 'lat'], 'station',
    crs=ccrs.PlateCarree(), label='MSS station'
)
# geography
# Clip land polygons and isobaths to the map region;
# extents is (lon_min, lat_min, lon_max, lat_max) as expected by shapely's box.
extents = (90, 60, 160, 90)
bbox = box(*extents)
land_feature = gv.feature.land.data.with_scale('10m')
land = gv.Shape(unary_union([geom.intersection(bbox) for geom in land_feature.geometries()])).relabel(group='Land')
# `bathy` comes from imports.py — presumably indexable by depth level; verify there
bathymetry = hv.Overlay([gv.Shape(bathy.loc[depth].intersection(bbox), group='Bathymetry', label=f'{depth} m isobath') for depth in [50, 200, 1000]])
# Assemble and save the Laptev station map.
l = (
    bathymetry
    * mss
    * prof_winter
    * prof_summer
    * lenn2011
    * land
    # * gv.feature.rivers.opts(line_width=3, color='blue', scale='110m')
)
# BUG FIX: was l.opts(*[options]), which passes the options *list* itself as
# a single positional argument; unpack it so each Options object is applied
# (consistent with .opts(*options) used elsewhere in this notebook).
l = l.opts(*options).redim.range(lon=(100, 150), lat=(71, 80))
fname = '../nb_fig/FIGURE_FN-NEW_LAPTEV_map'
hv.save(l, fname+'.html')
# clone=True so stripping the toolbar for the PNG does not mutate `l`
hv.save(l.opts(toolbar=None, clone=True), fname+'.png')
l
# raw data backing the MSS-station marker, for inspection
mss.data
A2 notebook: the Codispoti Laptev database indicates a well-behaved profile with a nitracline gradient of 0.15 mmol m-4 and a pycnocline gradient of 0.2 kg m-4.
# Back-of-the-envelope nitrate flux for the Laptev shelf, using the
# gradients quoted above (pycnocline 0.2 kg m-4, nitracline 0.15 mmol m-4).
# Buoyancy frequency squared, N2 = (g / rho0) * drho/dz, in s-2
N2 = 9.81/1025*0.2
# Osborn-type eddy diffusivity Krho = Gamma * eps / N2 with mixing
# efficiency Gamma = 0.2 and eps = 1e-10 W kg-1.  Units: m2 s-1
# (FIX: the original comment said "m2 d-1" — the factor 86400 below is
# what converts the flux from per-second to per-day).
Krho = 0.2 * 1e-10 / N2
# Nitrate flux FN = Krho * dNO3/dz, converted to mmol m-2 d-1
FN = Krho*0.15 * 86400
FN
# Reload the full profile data (the map section above reduced df to one row
# per station) and build the mean profiles for the figure.
df = load_laptev_no3()
# summer stations nearest the MSS site; station IDs are stored as strings
closest_summer_stations = np.array([4, 5, 20, 25]).astype(str)
# NOTE(review): the map above used month<=6 for "winter" but this uses
# month<6 — confirm which cutoff is intended.
df_winter_mean = df.loc[df.month<6].groupby('depth').mean().dropna(how='all')
df_summer_mean = df.loc[df.station.isin(closest_summer_stations)].groupby('depth').mean().dropna(how='all')
# station 15 ≈ the 2014 MSS position
df15 = df.loc[df.station=='15']
def load_laptev_mss():
    """Read the processed MSS microstructure casts (profiles 3-7) and
    return them as one DataFrame on a 3 m depth grid, with columns
    `sal` and linear `epsilon`."""
    frames = []
    for profile in range(3, 8):
        cast = pd.read_csv(
            f'../data/laptev/proc/MSS93eps_{profile}.dat',
            encoding='iso-8859-1', delim_whitespace=True, header=[0, 1],
        ).assign(profile_number=profile)
        frames.append(cast)
    df = pd.concat(frames)
    # collapse the two-row header (second row carries the units)
    df.columns = df.columns.droplevel(1)
    # pressure -> depth at the mean latitude of station 15
    df['depth'] = gsw.z_from_p(-df.Press, df15.lat.mean())
    edges = np.arange(0, 80, 3)
    centers = edges[:-1] + 1.5
    # bin depth to 3 m cells, labelled by bin center
    df.depth = pd.to_numeric(pd.cut(df.depth, bins=edges, labels=centers))
    # epsilon is stored as log10; convert back to W kg-1
    df.epsilon = 10**df.epsilon
    return df.rename(columns=dict(Sal='sal'))
# Median MSS profile on the 3 m depth grid
df_mss = load_laptev_mss().groupby('depth', as_index=False).median()
# Shared dimension metadata (labels/units/ranges) for the profile figure.
dims = dict(
    depth=hv.Dimension('depth', label='Depth', unit='m', range=(0, 45)),
    # BUG FIX: the nitrate unit read 'µmol m⁻² d⁻¹' (a flux), but this
    # dimension labels a *concentration*; the source column is
    # 'Nitrate [umol/l]', so the correct unit is µmol l⁻¹.
    nitrate=hv.Dimension('nitrate', label='NO₃ conc.', unit='µmol l⁻¹', range=(0, 5)),
    sal=hv.Dimension('sal', label='Salinity', unit='-'),
    epsilon=hv.Dimension('epsilon', label='TKE Dissipation', unit='W kg⁻¹', range=(1e-11, 1e-9)),
)
# Switch to the matplotlib backend for the static profile figure.  The
# Overlay plot class is registered explicitly — NOTE(review): presumably a
# workaround for the bokeh->matplotlib switch; confirm it is still needed.
hv.Store.register({hv.Overlay: hv.plotting.mpl.OverlayPlot}, 'matplotlib')
hv.extension('matplotlib')
# Options written in bokeh vocabulary; translated to matplotlib below.
options = [
    opts.GridMatrix(
        show_legend=True, sizing_mode='stretch_width',
    ),
    opts.Curve(
        # depth on the vertical axis, increasing downward, labels on top
        invert_axes=True, invert_yaxis=True, xaxis='top',
        show_grid=True, show_legend=True,
        aspect='auto', # frame_width=100, frame_height=400,
        padding=.05,
        linewidth=2,
    ),
    # dissipation spans decades -> log axis for the 'eps' group
    opts.Curve('eps', logx=True),
    opts.Overlay(legend_position='bottom_left'),
]
# Translate the bokeh-style option names to their matplotlib equivalents
options = translate_options(options, bokeh2mpl)
label_winter = 'Winter mean'
no3w = hv.Curve(df_winter_mean, 'depth', 'nitrate', label=label_winter)
sw = hv.Curve(df_winter_mean, 'depth', 'sal', label=label_winter)
label_summer = 'Summer mean'
no3s = hv.Curve(df_summer_mean, 'depth', 'nitrate', label=label_summer)
ss = hv.Curve(df_summer_mean, 'depth', 'sal', label=label_summer)
n15 = hv.Curve(df15, 'depth', 'nitrate', label='Station 15')
s15 = hv.Curve(df15, 'depth', 'sal', label='Station 15')
smss = hv.Curve(df_mss, 'depth', 'sal', label='MSS Station')
eps = hv.Curve(df_mss, 'depth', 'epsilon', label='MSS Station', group='eps')
# Three panels: salinity, nitrate, dissipation (padded with an empty curve)
panels = [sw*ss*s15*smss, no3w*no3s*n15, eps*hv.Curve([])]
# NOTE(review): this throwaway Layout looks like it initializes plot state
# before StyleLink is applied — confirm it is actually required.
_ = hv.Layout(panels).opts()
# Link color/linestyle cycles across panels so that e.g. the winter curves
# share one style in every panel.
sl = StyleLink(
    [[no3w, sw], [no3s, ss], [n15, s15], [smss, eps]],
    {
        'color': hv.Cycle(cmap_to_list(colorcet.cm.glasbey_dark)),
        'linestyle': hv.Cycle(['-', '--', ':', '-.'])
    }
)
l = hv.Layout(panels).opts(*options).redim(**dims)
def adjust(fig):
    """Matplotlib hook: fix margins and figure size before saving."""
    fig.subplots_adjust(bottom=0.1, top=0.85, left=0.1, right=0.95)
    fig.set_size_inches((10, 4))
# Render and save the three-panel profile figure
fig = mplrender(l, '../nb_fig/FIGURE_FN-NEW_LAPTEV_profiles', hooks=[adjust])
fig
Nitrate conc. at the MSS station is homogeneous across the whole depth.
Vertically integrated NO3 difference from summer to winter:
# Redfield conversion: 106 mol C per 16 mol N, 12 g C per mol C.
mmolN_to_mgC = (106 / 16) * 12
# Vertically integrated NO3 drawdown: winter-summer concentration difference
# summed over the 5 m depth bins (delta z = 5 m) -> mmol N m-2
deltaN = 5 * (df_winter_mean.nitrate - df_summer_mean.nitrate).sum()
# equivalent carbon fixation in g C m-2
deltaN * mmolN_to_mgC / 1e3
# presumably: mean daily flux over ~50 m and ~90 days — TODO confirm the 50
deltaN / 50 / 90
# Dimension metadata for the Baffin Bay float figure.
dims = {
    'Depth': hv.Dimension('Depth', unit='m', range=(0, None)),
    'CorNO3': hv.Dimension('CorNO3', label='NO₃', unit='µM', range=(0, None)),
    'NO3int': hv.Dimension('NO3int', label='NO₃ deficit', unit='mmol m⁻²', range=(0, None)),
    # one annual cycle, Aug 2017 - Aug 2018
    'Date': hv.Dimension('Date', range=(pd.Timestamp('2017-8-1'), pd.Timestamp('2018-8-1')))
}
def load_floats(bin_average=False,
                path='/Users/doppler/papers/winter/nb_temp_data/FLOATS_by_date_derived.nc'):
    """Load the derived BGC-Argo float dataset.

    Parameters
    ----------
    bin_average : bool
        If True, average onto monthly ('MS') date bins and 4 m depth bins
        (labelled at bin centers) before returning.
    path : str
        Location of the NetCDF file.  IMPROVEMENT: the absolute path was
        hard-coded; it is now a parameter with the original value as the
        default, so existing callers are unaffected.

    Returns
    -------
    xarray.Dataset
    """
    ds = xr.open_dataset(path)
    if bin_average:
        date_bins = pd.date_range(ds.Date[0].values, ds.Date[-1].values, freq='MS')
        date_labels = date_bins[:-1]
        depth_bins = np.arange(0., 200, 4)
        depth_labels = depth_bins[:-1] + 2
        # bin in pandas space, then go back to xarray
        df = ds.to_dataframe().reset_index()
        df.Date = pd.cut(df.Date, bins=date_bins, labels=date_labels)
        df.Depth = pd.to_numeric(pd.cut(df.Depth, bins=depth_bins, labels=depth_labels))
        ds = df.groupby(['Date', 'Depth']).mean().to_xarray()
    return ds
# Monthly / 4 m-binned fields for the Hovmoeller panel
ds = load_floats(bin_average=True)
ntr = ds.sel(Depth=ds.Depth<=100.).hvplot.quadmesh('Date', 'Depth', 'CorNO3').opts()
# mld is repeated along Depth; reduce with get_unique to a time series
mld = ds.mld.reduce(get_unique, dim='Depth').hvplot(label='Mixed Layer Depth')
def adjust(plot, element):
    """Bokeh hook: title and reposition the colorbar of the quadmesh."""
    p = plot.state
    # first (assumed only) colorbar in the figure
    c = p.select(dict(type=ColorBar))[0]
    c.title = element.vdims[0].pprint_label
    c.location = (0, -10)
    c.height = 220
# Panel A: nitrate Hovmoeller with the mixed-layer depth overlaid
p1 = (ntr*mld).opts(
    opts.QuadMesh(
        width=500, height=310,
        hooks=[adjust], tools=['hover'],
        # depth increases downward
        invert_yaxis=True,
        cmap=cmocean.cm.turbid, colorbar=True,
    ),
    opts.Curve(
        # bright cyan so the MLD line stands out against the turbid colormap
        color='#12d9e3', line_width=3,
        show_legend=True
    ),
    opts.Overlay(legend_position='bottom_left')
)
# Nitrate deficit relative to the reference depth zref (m).
zref=60
ds = load_floats()
ds = ds.sel(Depth=slice(0, zref))
# mmol/m2
# Deficit = sum over depth of (NO3 at zref - NO3(z)); min_count=1 keeps
# all-NaN profiles as NaN.  NOTE(review): the sum runs over depth *samples*
# without multiplying by the layer thickness — the mmol/m2 unit only holds
# if the native grid spacing is 1 m; confirm.
NO3int = (ds.CorNO3.isel(dict(Depth=-1)) - ds.CorNO3).sum(
    dim='Depth',min_count=1).rename('NO3int')
#mmol/m2/d
# NOTE(review): per-day only if consecutive Dates are 1 day apart — confirm.
dNO3= NO3int.diff(dim='Date').rename('dNO3')
# float-ensemble mean deficit time series, plus a 15-point centered running mean
df_dno3 = NO3int.mean(dim='Float').to_dataframe()
df_dno3_avg = df_dno3.NO3int.rolling(15, center=True).mean().dropna()
# Panel B: deficit scatter plus running mean
p2 = (
    df_dno3.reset_index().hvplot.scatter('Date', 'NO3int', color='grey').opts(yticks=[0, 100, 200, 300])
    * df_dno3_avg.hvplot(color='k', line_width=3)
).opts(show_legend=False)
hv.renderer('bokeh').theme = theme
l = p1.relabel('A') + p2.relabel('B')
l = l.redim(**dims).opts(
    opts.Overlay(width=600, xrotation=30, xlabel='',)
).cols(1)
fname = '../nb_fig/FIGURE_FN-NEW_BAFFIN'
hv.save(l, fname+'.html')
# BUG FIX: clone=True so stripping the toolbar for the PNG does not mutate
# `l`, which is saved to SVG and displayed below (the map figure earlier
# already used clone=True for exactly this reason).
hv.save(l.opts(toolbar=None, clone=True), fname+'.png')
save_bokeh_svg_multipanel(l, fname, 'v')
l
# scratch arithmetic (200/4/30 ≈ 1.7) — purpose unclear; left as in notebook
200/4/30
# Near-surface (<=15 m) nitrate time series averaged over depth and floats
ds.sel(Depth = ds.Depth <=15).CorNO3.mean(dim=['Depth','Float']).plot(marker='.', ls='')
plt.grid()
Retrieve the center coordinate of the BGC Argo float observations
# Center coordinate of the float observations: centroid of the convex hull
# of all (lon, lat) positions.
df = pd.read_csv('~/papers/winter/nb_temp_data/FLOATS_position_winter_guesses.csv')
coords = df[['Longitude','Latitude']].dropna()
position_points = [Point(x, y) for x, y in zip(coords.Longitude, coords.Latitude)]
hull_center = MultiPoint(position_points).convex_hull.centroid
print(hull_center.wkt)
POINT (-65.36189062114981 72.22652198050153)
def load_nishino_etal_2015():
    """Read the Mirai 2013 nutrient/turbulence dataset of Nishino et al. (2015).

    -999 is the file's missing-value flag; the timestamp column is parsed
    into datetimes.
    """
    path = '../data/fn-compilation-database/nishino2015nutrient/Data_from_Mirai2013.csv'
    return pd.read_csv(path, na_values=-999,
                       parse_dates=['yyyy-mm-ddThh:mm:ss.sss'])
df = load_nishino_etal_2015()
df.head(3)
# Quick look: per-station nitrate and flux profiles, depth on the y axis
options = [
    opts.Curve(invert_yaxis=True, invert_axes=True),
]
# The odd column names ('~$m~#', '~^2~#') appear to be escaped µ and
# superscript markup from the source CSV — they must match the file exactly.
no3 = hv.Curve(df, 'Depth [m]', ['Nitrate [~$m~#mol/kg]', 'Station','yyyy-mm-ddThh:mm:ss.sss'])
fn = hv.Curve(df, 'Depth [m]', ['Flux_N1 [mmol/m~^2~#/s]', 'Station'])
l = no3.groupby('Station').overlay() + fn.groupby('Station').overlay()
l.options(*options)
The data comprises three wind events, supposedly leading to enhanced mixing:
First, we'll naively average the provided depth-dependent nitrate fluxes, which were originally obtained by multiplying the dissipation of TKE, the buoyancy frequency, and the nitrate gradient for each depth interval.
# Timestamp column and the 25-40 m depth window used for the averages below.
time = df['yyyy-mm-ddThh:mm:ss.sss']
z_interval=[25,40]
iz = df['Depth [m]'].between(z_interval[0], z_interval[1])

def _in_window(start, end):
    """Boolean mask for samples with start <= time <= end."""
    return (time >= start) & (time <= end)

# The three wind events (whole days, inclusive)
ii1 = _in_window(dt.datetime(2013, 9, 14, 0, 0, 0), dt.datetime(2013, 9, 15, 23, 59, 59))
ii2 = _in_window(dt.datetime(2013, 9, 19, 0, 0, 0), dt.datetime(2013, 9, 21, 23, 59, 59))
ii3 = _in_window(dt.datetime(2013, 9, 24, 0, 0, 0), dt.datetime(2013, 9, 25, 23, 59, 59))
# Average the provided depth-resolved fluxes and diffusivities (each given
# as two estimates, *1 and *2, which are pooled) over each wind event and
# over the whole series; the factor 86400 converts per-second to per-day.
for the_vars in [
    ['Flux_N1 [mmol/m~^2~#/s]','Flux_N2 [mmol/m~^2~#/s]'],
    ['Krho1 [m~^2~#/s]', 'Krho2 [m~^2~#/s]']
]:
    if the_vars[0].startswith('Krho'):
        print('Krho in m2/d:')
    elif the_vars[0].startswith('Flux_N'):
        # BUG FIX: the message read 'mmmol/m2/d'
        print('Nitrate flux in mmol/m2/d:')
    print('Each wind event:')
    for ii in [ii1,ii2,ii3]:
        print(df.loc[ii & iz, the_vars].mean().mean()*86400)
    print('All wind events:')
    print(df.loc[(ii1 | ii2 | ii3) & iz,the_vars].mean().mean()*86400)
    print('Entire time series:')
    var_mean = df.loc[iz, the_vars].mean().mean()*86400
    print(var_mean)
    # keep the full-series means for the consistency check further below
    if the_vars[0].startswith('Krho'):
        Krho = var_mean
    elif the_vars[0].startswith('Flux_N'):
        FN = var_mean
    print()
Second, as a more reliable means of averaging the turbulent flux over the (non-turbulent) background field, we determine the nitrate gradient over the same depth interval and multiply that by the average eddy diffusivity:
# Average all stations onto the common depth grid, restrict nitrate to the
# 25-40 m window, and fit a straight line to get the mean vertical gradient.
df = df.groupby(['Depth [m]']).mean()
in_window = (df.index >= z_interval[0]) & (df.index <= z_interval[1])
df = df.loc[in_window, 'Nitrate [~$m~#mol/kg]']
print('dNO3/dz in uM/m:')
# slope (first coefficient) of the degree-1 fit, in uM per meter
no3_slope, _ = np.polyfit(df.index, df.values, 1)
no3_slope
The above first estimate is consistent with this second, arguably more accurate, averaging method:
Krho * no3_slope
# Surface nitrate and cruise-track centroid for the compilation table
df = load_nishino_etal_2015()
# mean nitrate in the top 10 m
df.loc[df['Depth [m]']<=10, 'Nitrate [~$m~#mol/kg]'].mean()
# one position per station -> centroid of the cruise track
d = df.groupby('Station').first()
LineString(zip(d['Longitude [degrees_east]'],d['Latitude [degrees_north]'])).centroid.wkt
# convert 191.75°E (0-360 convention) to °W
360-191.75